DO NOT START TO RUN THE NOTEBOOK FROM THE BEGINNING. START TO RUN THE NOTEBOOK WHERE IT SAYS "Transfer learning starts here to run the notebook!"¶

5 classes of cervical cells¶

"Dyskeratotic", "Koilocytotic": abnormal but not malignant¶

"Metaplastic": benign¶

"Parabasal", "Superficial-Intermediate": normal cells¶

In [ ]:
# Importing Necessary Libraries
import cv2
import os
import shutil
import math
import random
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

import warnings
warnings.filterwarnings("ignore")
In [ ]:
# import shutil

# # Clear the existing mount point
# shutil.rmtree('/content/drive')

# Mount Google Drive
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive

Format dataset. Since we will only use "Image Features", focus on .bmp.¶

Note: in the dataset, there are Cell Features, Image Features, and Deep Features.¶

In [ ]:
# Function for Formatting Dataset
def FormatDataset(dataset_src, dataset_dest, classes):
    # Making a Copy of Dataset
    new_cropped_dest = [os.path.join(dataset_dest, cls, 'CROPPED') for cls in classes]
    new_complete_dest = [os.path.join(dataset_dest, cls, 'COMPLETE') for cls in classes]
    cropped_src = [ dataset_src + "/im_" + cls + "/im_" + cls + "/CROPPED" for cls in classes ]
    complete_src = [ dataset_src + "/im_" + cls + "/im_" + cls for cls in classes ]
    for (dest1, dest2) in zip(new_cropped_dest, new_complete_dest):
        os.makedirs(dest1, exist_ok=True)
        os.makedirs(dest2, exist_ok=True)
    # Formating Cropped Images
    for (src, new_dest) in zip(cropped_src, new_cropped_dest):
        for file in os.listdir(src):
            filename, file_ext = os.path.splitext(file)
            if file_ext == '.bmp':
                img_des = os.path.join(new_dest, filename + '.jpg')
                img = cv2.imread(os.path.join(src, file))
                img = cv2.resize(img, (64, 64))
                img = cv2.copyMakeBorder(img, 1, 1, 1, 1, cv2.BORDER_CONSTANT, value=0)
                img = cv2.blur(img, (2, 2))
                cv2.imwrite(img_des ,img)
    # Formatting Complete Images
    for (src, new_dest) in zip(complete_src, new_complete_dest):
        for file in os.listdir(src):
            filename, file_ext = os.path.splitext(file)
            if file_ext == '.bmp':
                img_des = os.path.join(new_dest, filename + '.jpg')
                img = cv2.imread(os.path.join(src, file))
                img = cv2.resize(img, (256, 256))
                img = cv2.copyMakeBorder(img, 2, 2, 2, 2, cv2.BORDER_CONSTANT, value=0)
                img = cv2.blur(img, (2, 2))
                cv2.imwrite(img_des, img)

# Source Location for Dataset
src = '/content/drive/Shareddrives/Computer Vision Final Project'
# Destination Location for Dataset
dest = '/content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer'
# Image Classes
classes = ["Dyskeratotic", "Koilocytotic", "Metaplastic", "Parabasal", "Superficial-Intermediate"]
# Formatting Dataset
FormatDataset(src, dest, classes)
---------------------------------------------------------------------------
FileExistsError                           Traceback (most recent call last)
/usr/lib/python3.10/os.py in makedirs(name, mode, exist_ok)
    224     try:
--> 225         mkdir(name, mode)
    226     except OSError:

FileExistsError: [Errno 17] File exists: '/content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/Parabasal/CROPPED'

During handling of the above exception, another exception occurred:

KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-3-326690194e6e> in <cell line: 41>()
     39 classes = ["Dyskeratotic", "Koilocytotic", "Metaplastic", "Parabasal", "Superficial-Intermediate"]
     40 # Formatting Dataset
---> 41 FormatDataset(src, dest, classes)

<ipython-input-3-326690194e6e> in FormatDataset(dataset_src, dataset_dest, classes)
      7     complete_src = [ dataset_src + "/im_" + cls + "/im_" + cls for cls in classes ]
      8     for (dest1, dest2) in zip(new_cropped_dest, new_complete_dest):
----> 9         os.makedirs(dest1, exist_ok=True)
     10         os.makedirs(dest2, exist_ok=True)
     11     # Formating Cropped Images

/usr/lib/python3.10/os.py in makedirs(name, mode, exist_ok)
    223             return
    224     try:
--> 225         mkdir(name, mode)
    226     except OSError:
    227         # Cannot rely on checking for EEXIST, since the operating system

KeyboardInterrupt: 

image.png

Count the number of images from each class under COMPLETE and CROPPED seperately.¶

In [ ]:
import os
import matplotlib.pyplot as plt

root_dir = "/content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer"

classes = ["Dyskeratotic","Koilocytotic","Metaplastic","Parabasal","Superficial-Intermediate"]

def GetDatasetSize(path, classes, main="CROPPED"):
    num_of_image = {}
    total_images = 0
    for cls in classes:
        # Counting the Number of Files in the Folder
        num_files = len(os.listdir(os.path.join(path, cls, main)))
        num_of_image[cls] = num_files
        total_images += num_files
    return num_of_image, total_images

def plot_class_distribution(class_image_counts):
    classes = list(class_image_counts.keys())
    counts = list(class_image_counts.values())

    colors = ['orange' if cls in ["Dyskeratotic", "Koilocytotic"] else
              'yellow' if cls == "Metaplastic" else
              'green' for cls in classes]

    plt.figure(figsize=(10, 6))
    plt.bar(classes, counts, color=colors)
    plt.xlabel('Class')
    plt.ylabel('Number of Images')
    plt.title('Number of Images per Class')
    plt.xticks(rotation=45)
    plt.show()
In [ ]:
class_image_counts, total_images = GetDatasetSize(root_dir, classes, "COMPLETE")
print("COMPLETE")
print("Number of images per class:", class_image_counts)
print("Total number of images:", total_images)

# Plot the distribution
plot_class_distribution(class_image_counts)
COMPLETE
Number of images per class: {'Dyskeratotic': 223, 'Koilocytotic': 238, 'Metaplastic': 271, 'Parabasal': 108, 'Superficial-Intermediate': 126}
Total number of images: 966
In [ ]:
class_image_counts, total_images = GetDatasetSize(root_dir, classes, "CROPPED")
print("CROPPED")
print("Number of images per class:", class_image_counts)
print("Total number of images:", total_images)

# Plot the distribution
plot_class_distribution(class_image_counts)
CROPPED
Number of images per class: {'Dyskeratotic': 813, 'Koilocytotic': 825, 'Metaplastic': 793, 'Parabasal': 787, 'Superficial-Intermediate': 831}
Total number of images: 4049

Present 5 images from each class under COMPLETE and CROPPED seperately.¶

In [ ]:
import os
import cv2
import matplotlib.pyplot as plt


def display_images(path, classes, main="CROPPED", num_images=5):
    fig, axes = plt.subplots(len(classes), num_images, figsize=(15, 15))

    color_map = {
        "Dyskeratotic": "orange",
        "Koilocytotic": "orange",
        "Metaplastic": "yellow",
        "Parabasal": "green",
        "Superficial-Intermediate": "green"
    }

    for i, cls in enumerate(classes):
        cls_path = os.path.join(path, cls, main) # Construct the path to the directory containing the images for the current class.
        images = os.listdir(cls_path)[:num_images]
        for j, image_file in enumerate(images):
            img_path = os.path.join(cls_path, image_file)
            img = cv2.imread(img_path)
            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # Convert the image from BGR (OpenCV default) to RGB (matplotlib default).
            axes[i, j].imshow(img) # Display the image in the appropriate subplot.
            axes[i, j].axis('off')
            if j == 0: # Set the title of the first image in each row to the class name.
                axes[i, j].set_title(cls, color='black', bbox=dict(facecolor=color_map[cls], edgecolor='none', pad=5))

    plt.tight_layout()
    plt.show()
In [ ]:
print("COMPLETE")
display_images(root_dir, classes, "COMPLETE", 5)
COMPLETE
In [ ]:
print("CROPPED")
display_images(root_dir, classes, "CROPPED", 5)
CROPPED

Split First: Split the CROPPED dataset into train, validation, and test sets.¶

Augment Training Set: Perform data augmentation only on the training set.¶

This way, we maintain the integrity of the validation and test sets while expanding the training data through augmentation.¶

If augmentation and then split dataset, there's a risk that augmented versions of the same image may end up in both the training and validation/test sets, which can lead to overfitting and overly optimistic performance estimates.¶

Split the Dataset into train, validation, and test for CROPPED¶

70% for Train Data¶

15% for Validation Data¶

15% for Testing Data¶

In [ ]:
import os
import shutil
import numpy as np

# Function for Creating Train / Validation / Test folders (One time use Only)
def TrainValTestSplit(root_dir, classes_dir, main="CROPPED", val_ratio=0.15, test_ratio=0.15):
    for cls in classes_dir:
        # Creating Split Folders inside the root_dir
        # For each class, create directories for training, validation, and test sets inside root_dir.
        os.makedirs(os.path.join(root_dir, 'train', cls), exist_ok=True)
        os.makedirs(os.path.join(root_dir, 'val', cls), exist_ok=True)
        os.makedirs(os.path.join(root_dir, 'test', cls), exist_ok=True)

        # Folder to copy images from
        src = os.path.join(root_dir, cls, main)

        # Splitting the Files in the Given ratio
        # List all file names in the source directory.
        allFileNames = os.listdir(src)
        # Shuffle the file names to randomize the order.
        np.random.shuffle(allFileNames)
        # Split the file names into training, validation, and testing sets based on the specified ratios.
        train_FileNames, val_FileNames, test_FileNames = np.split(
            np.array(allFileNames),
            [int(len(allFileNames) * (1 - (val_ratio + test_ratio))), int(len(allFileNames) * (1 - test_ratio))]
        )

        # Convert the file names into full file paths for training, validation, and testing sets.
        train_FileNames = [os.path.join(src, name) for name in train_FileNames.tolist()]
        val_FileNames = [os.path.join(src, name) for name in val_FileNames.tolist()]
        test_FileNames = [os.path.join(src, name) for name in test_FileNames.tolist()]

        # Printing the Split Details
        # Print the number of total images, training images, validation images, and testing images for each class.
        print(cls, ':')
        print('Total images: ', len(allFileNames))
        print('Training: ', len(train_FileNames))
        print('Validation: ', len(val_FileNames))
        print('Testing: ', len(test_FileNames))

        # Copy-pasting images to respective directories
        # Copy each image to its respective directory (train, val, or test) based on the split.
        for name in train_FileNames:
            shutil.copy(name, os.path.join(root_dir, 'train', cls))

        for name in val_FileNames:
            shutil.copy(name, os.path.join(root_dir, 'val', cls))

        for name in test_FileNames:
            shutil.copy(name, os.path.join(root_dir, 'test', cls))
        print()

# Performing Train / Validation / Test Split
root_dir = "/content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer"
classes = ["Dyskeratotic", "Koilocytotic", "Metaplastic", "Parabasal", "Superficial-Intermediate"]

TrainValTestSplit(root_dir, classes)
Dyskeratotic :
Total images:  813
Training:  569
Validation:  122
Testing:  122

Koilocytotic :
Total images:  825
Training:  577
Validation:  124
Testing:  124

Metaplastic :
Total images:  793
Training:  555
Validation:  119
Testing:  119

Parabasal :
Total images:  787
Training:  550
Validation:  118
Testing:  119

Superficial-Intermediate :
Total images:  831
Training:  581
Validation:  125
Testing:  125

Another way to count the number of images. It lines up with counts above.¶

In [ ]:
import os

# Function to count number of images in each class directory for train, val, and test
def count_images_in_split_dirs(root_dir, classes):
    splits = ['train', 'val', 'test']
    counts = {split: {cls: 0 for cls in classes} for split in splits}

    for split in splits:
        for cls in classes:
            class_dir = os.path.join(root_dir, split, cls)
            if os.path.exists(class_dir):
                counts[split][cls] = len(os.listdir(class_dir))
            else:
                counts[split][cls] = 0

    return counts

# Define the root directory and classes
root_dir = "/content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer"
classes = ["Dyskeratotic", "Koilocytotic", "Metaplastic", "Parabasal", "Superficial-Intermediate"]

# Get the counts of images
image_counts = count_images_in_split_dirs(root_dir, classes)

# Print the counts
for split in image_counts:
    print(f"\n{split.upper()}:")
    for cls in image_counts[split]:
        print(f"  {cls}: {image_counts[split][cls]} images")
TRAIN:
  Dyskeratotic: 569 images
  Koilocytotic: 577 images
  Metaplastic: 555 images
  Parabasal: 550 images
  Superficial-Intermediate: 581 images

VAL:
  Dyskeratotic: 122 images
  Koilocytotic: 124 images
  Metaplastic: 119 images
  Parabasal: 118 images
  Superficial-Intermediate: 125 images

TEST:
  Dyskeratotic: 122 images
  Koilocytotic: 124 images
  Metaplastic: 119 images
  Parabasal: 119 images
  Superficial-Intermediate: 125 images
In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]:
 

Transfer learning starts here to run the notebook!¶

In [ ]:
# Importing Necessary Libraries
import cv2
import os
import shutil
import math
import random
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

import warnings
warnings.filterwarnings("ignore")
In [ ]:
# import shutil

# # Clear the existing mount point
# shutil.rmtree('/content/drive')

# Mount Google Drive
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive
In [ ]:
root_dir = "/content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer"
classes = ["Dyskeratotic", "Koilocytotic", "Metaplastic", "Parabasal", "Superficial-Intermediate"]

Building the transfer learning model¶

ResNet-50 useS residual learning to train very deep networks. Residual blocks help in mitigating the vanishing gradient problem, allowing for the training of deeper networks. A popular choice that balances depth and performance.¶

ResNet-50 is often considered the most similar in spirit to VGG-19 because both are built on straightforward, deep convolutional architectures. ResNet introduces residual connections, which help in training very deep networks without suffering from vanishing gradients.¶

In [ ]:
# Importing Keras for Image Classification
import keras
from keras.layers import Dense,Conv2D, Flatten, MaxPool2D, Dropout
from keras.models import Sequential
from keras.preprocessing import image
from keras.callbacks import ModelCheckpoint
from keras.models import load_model
In [ ]:
from tensorflow.keras.applications import ResNet50
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Dense, Flatten, Dropout
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.preprocessing.image import ImageDataGenerator
In [ ]:
# Load the ResNet50 model pre-trained on ImageNet, excluding the top layers

base_model = ResNet50(weights='imagenet', include_top=False, input_shape=(64, 64, 3))
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/resnet/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5
94765736/94765736 [==============================] - 0s 0us/step
In [ ]:
# Create the full model

model = Model(inputs=base_model.input, outputs=predictions)

# Compile the model

model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

model.summary()
Model: "model"
__________________________________________________________________________________________________
 Layer (type)                Output Shape                 Param #   Connected to                  
==================================================================================================
 input_1 (InputLayer)        [(None, 64, 64, 3)]          0         []                            
                                                                                                  
 conv1_pad (ZeroPadding2D)   (None, 70, 70, 3)            0         ['input_1[0][0]']             
                                                                                                  
 conv1_conv (Conv2D)         (None, 32, 32, 64)           9472      ['conv1_pad[0][0]']           
                                                                                                  
 conv1_bn (BatchNormalizati  (None, 32, 32, 64)           256       ['conv1_conv[0][0]']          
 on)                                                                                              
                                                                                                  
 conv1_relu (Activation)     (None, 32, 32, 64)           0         ['conv1_bn[0][0]']            
                                                                                                  
 pool1_pad (ZeroPadding2D)   (None, 34, 34, 64)           0         ['conv1_relu[0][0]']          
                                                                                                  
 pool1_pool (MaxPooling2D)   (None, 16, 16, 64)           0         ['pool1_pad[0][0]']           
                                                                                                  
 conv2_block1_1_conv (Conv2  (None, 16, 16, 64)           4160      ['pool1_pool[0][0]']          
 D)                                                                                               
                                                                                                  
 conv2_block1_1_bn (BatchNo  (None, 16, 16, 64)           256       ['conv2_block1_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block1_1_relu (Activ  (None, 16, 16, 64)           0         ['conv2_block1_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv2_block1_2_conv (Conv2  (None, 16, 16, 64)           36928     ['conv2_block1_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv2_block1_2_bn (BatchNo  (None, 16, 16, 64)           256       ['conv2_block1_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block1_2_relu (Activ  (None, 16, 16, 64)           0         ['conv2_block1_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv2_block1_0_conv (Conv2  (None, 16, 16, 256)          16640     ['pool1_pool[0][0]']          
 D)                                                                                               
                                                                                                  
 conv2_block1_3_conv (Conv2  (None, 16, 16, 256)          16640     ['conv2_block1_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv2_block1_0_bn (BatchNo  (None, 16, 16, 256)          1024      ['conv2_block1_0_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block1_3_bn (BatchNo  (None, 16, 16, 256)          1024      ['conv2_block1_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block1_add (Add)      (None, 16, 16, 256)          0         ['conv2_block1_0_bn[0][0]',   
                                                                     'conv2_block1_3_bn[0][0]']   
                                                                                                  
 conv2_block1_out (Activati  (None, 16, 16, 256)          0         ['conv2_block1_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv2_block2_1_conv (Conv2  (None, 16, 16, 64)           16448     ['conv2_block1_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv2_block2_1_bn (BatchNo  (None, 16, 16, 64)           256       ['conv2_block2_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block2_1_relu (Activ  (None, 16, 16, 64)           0         ['conv2_block2_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv2_block2_2_conv (Conv2  (None, 16, 16, 64)           36928     ['conv2_block2_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv2_block2_2_bn (BatchNo  (None, 16, 16, 64)           256       ['conv2_block2_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block2_2_relu (Activ  (None, 16, 16, 64)           0         ['conv2_block2_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv2_block2_3_conv (Conv2  (None, 16, 16, 256)          16640     ['conv2_block2_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv2_block2_3_bn (BatchNo  (None, 16, 16, 256)          1024      ['conv2_block2_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block2_add (Add)      (None, 16, 16, 256)          0         ['conv2_block1_out[0][0]',    
                                                                     'conv2_block2_3_bn[0][0]']   
                                                                                                  
 conv2_block2_out (Activati  (None, 16, 16, 256)          0         ['conv2_block2_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv2_block3_1_conv (Conv2  (None, 16, 16, 64)           16448     ['conv2_block2_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv2_block3_1_bn (BatchNo  (None, 16, 16, 64)           256       ['conv2_block3_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block3_1_relu (Activ  (None, 16, 16, 64)           0         ['conv2_block3_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv2_block3_2_conv (Conv2  (None, 16, 16, 64)           36928     ['conv2_block3_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv2_block3_2_bn (BatchNo  (None, 16, 16, 64)           256       ['conv2_block3_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block3_2_relu (Activ  (None, 16, 16, 64)           0         ['conv2_block3_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv2_block3_3_conv (Conv2  (None, 16, 16, 256)          16640     ['conv2_block3_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv2_block3_3_bn (BatchNo  (None, 16, 16, 256)          1024      ['conv2_block3_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv2_block3_add (Add)      (None, 16, 16, 256)          0         ['conv2_block2_out[0][0]',    
                                                                     'conv2_block3_3_bn[0][0]']   
                                                                                                  
 conv2_block3_out (Activati  (None, 16, 16, 256)          0         ['conv2_block3_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv3_block1_1_conv (Conv2  (None, 8, 8, 128)            32896     ['conv2_block3_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv3_block1_1_bn (BatchNo  (None, 8, 8, 128)            512       ['conv3_block1_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block1_1_relu (Activ  (None, 8, 8, 128)            0         ['conv3_block1_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv3_block1_2_conv (Conv2  (None, 8, 8, 128)            147584    ['conv3_block1_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv3_block1_2_bn (BatchNo  (None, 8, 8, 128)            512       ['conv3_block1_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block1_2_relu (Activ  (None, 8, 8, 128)            0         ['conv3_block1_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv3_block1_0_conv (Conv2  (None, 8, 8, 512)            131584    ['conv2_block3_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv3_block1_3_conv (Conv2  (None, 8, 8, 512)            66048     ['conv3_block1_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv3_block1_0_bn (BatchNo  (None, 8, 8, 512)            2048      ['conv3_block1_0_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block1_3_bn (BatchNo  (None, 8, 8, 512)            2048      ['conv3_block1_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block1_add (Add)      (None, 8, 8, 512)            0         ['conv3_block1_0_bn[0][0]',   
                                                                     'conv3_block1_3_bn[0][0]']   
                                                                                                  
 conv3_block1_out (Activati  (None, 8, 8, 512)            0         ['conv3_block1_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv3_block2_1_conv (Conv2  (None, 8, 8, 128)            65664     ['conv3_block1_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv3_block2_1_bn (BatchNo  (None, 8, 8, 128)            512       ['conv3_block2_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block2_1_relu (Activ  (None, 8, 8, 128)            0         ['conv3_block2_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv3_block2_2_conv (Conv2  (None, 8, 8, 128)            147584    ['conv3_block2_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv3_block2_2_bn (BatchNo  (None, 8, 8, 128)            512       ['conv3_block2_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block2_2_relu (Activ  (None, 8, 8, 128)            0         ['conv3_block2_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv3_block2_3_conv (Conv2  (None, 8, 8, 512)            66048     ['conv3_block2_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv3_block2_3_bn (BatchNo  (None, 8, 8, 512)            2048      ['conv3_block2_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block2_add (Add)      (None, 8, 8, 512)            0         ['conv3_block1_out[0][0]',    
                                                                     'conv3_block2_3_bn[0][0]']   
                                                                                                  
 conv3_block2_out (Activati  (None, 8, 8, 512)            0         ['conv3_block2_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv3_block3_1_conv (Conv2  (None, 8, 8, 128)            65664     ['conv3_block2_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv3_block3_1_bn (BatchNo  (None, 8, 8, 128)            512       ['conv3_block3_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block3_1_relu (Activ  (None, 8, 8, 128)            0         ['conv3_block3_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv3_block3_2_conv (Conv2  (None, 8, 8, 128)            147584    ['conv3_block3_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv3_block3_2_bn (BatchNo  (None, 8, 8, 128)            512       ['conv3_block3_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block3_2_relu (Activ  (None, 8, 8, 128)            0         ['conv3_block3_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv3_block3_3_conv (Conv2  (None, 8, 8, 512)            66048     ['conv3_block3_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv3_block3_3_bn (BatchNo  (None, 8, 8, 512)            2048      ['conv3_block3_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block3_add (Add)      (None, 8, 8, 512)            0         ['conv3_block2_out[0][0]',    
                                                                     'conv3_block3_3_bn[0][0]']   
                                                                                                  
 conv3_block3_out (Activati  (None, 8, 8, 512)            0         ['conv3_block3_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv3_block4_1_conv (Conv2  (None, 8, 8, 128)            65664     ['conv3_block3_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv3_block4_1_bn (BatchNo  (None, 8, 8, 128)            512       ['conv3_block4_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block4_1_relu (Activ  (None, 8, 8, 128)            0         ['conv3_block4_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv3_block4_2_conv (Conv2  (None, 8, 8, 128)            147584    ['conv3_block4_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv3_block4_2_bn (BatchNo  (None, 8, 8, 128)            512       ['conv3_block4_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block4_2_relu (Activ  (None, 8, 8, 128)            0         ['conv3_block4_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv3_block4_3_conv (Conv2  (None, 8, 8, 512)            66048     ['conv3_block4_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv3_block4_3_bn (BatchNo  (None, 8, 8, 512)            2048      ['conv3_block4_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv3_block4_add (Add)      (None, 8, 8, 512)            0         ['conv3_block3_out[0][0]',    
                                                                     'conv3_block4_3_bn[0][0]']   
                                                                                                  
 conv3_block4_out (Activati  (None, 8, 8, 512)            0         ['conv3_block4_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv4_block1_1_conv (Conv2  (None, 4, 4, 256)            131328    ['conv3_block4_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv4_block1_1_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block1_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block1_1_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block1_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block1_2_conv (Conv2  (None, 4, 4, 256)            590080    ['conv4_block1_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block1_2_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block1_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block1_2_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block1_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block1_0_conv (Conv2  (None, 4, 4, 1024)           525312    ['conv3_block4_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv4_block1_3_conv (Conv2  (None, 4, 4, 1024)           263168    ['conv4_block1_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block1_0_bn (BatchNo  (None, 4, 4, 1024)           4096      ['conv4_block1_0_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block1_3_bn (BatchNo  (None, 4, 4, 1024)           4096      ['conv4_block1_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block1_add (Add)      (None, 4, 4, 1024)           0         ['conv4_block1_0_bn[0][0]',   
                                                                     'conv4_block1_3_bn[0][0]']   
                                                                                                  
 conv4_block1_out (Activati  (None, 4, 4, 1024)           0         ['conv4_block1_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv4_block2_1_conv (Conv2  (None, 4, 4, 256)            262400    ['conv4_block1_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv4_block2_1_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block2_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block2_1_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block2_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block2_2_conv (Conv2  (None, 4, 4, 256)            590080    ['conv4_block2_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block2_2_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block2_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block2_2_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block2_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block2_3_conv (Conv2  (None, 4, 4, 1024)           263168    ['conv4_block2_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block2_3_bn (BatchNo  (None, 4, 4, 1024)           4096      ['conv4_block2_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block2_add (Add)      (None, 4, 4, 1024)           0         ['conv4_block1_out[0][0]',    
                                                                     'conv4_block2_3_bn[0][0]']   
                                                                                                  
 conv4_block2_out (Activati  (None, 4, 4, 1024)           0         ['conv4_block2_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv4_block3_1_conv (Conv2  (None, 4, 4, 256)            262400    ['conv4_block2_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv4_block3_1_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block3_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block3_1_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block3_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block3_2_conv (Conv2  (None, 4, 4, 256)            590080    ['conv4_block3_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block3_2_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block3_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block3_2_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block3_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block3_3_conv (Conv2  (None, 4, 4, 1024)           263168    ['conv4_block3_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block3_3_bn (BatchNo  (None, 4, 4, 1024)           4096      ['conv4_block3_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block3_add (Add)      (None, 4, 4, 1024)           0         ['conv4_block2_out[0][0]',    
                                                                     'conv4_block3_3_bn[0][0]']   
                                                                                                  
 conv4_block3_out (Activati  (None, 4, 4, 1024)           0         ['conv4_block3_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv4_block4_1_conv (Conv2  (None, 4, 4, 256)            262400    ['conv4_block3_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv4_block4_1_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block4_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block4_1_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block4_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block4_2_conv (Conv2  (None, 4, 4, 256)            590080    ['conv4_block4_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block4_2_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block4_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block4_2_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block4_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block4_3_conv (Conv2  (None, 4, 4, 1024)           263168    ['conv4_block4_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block4_3_bn (BatchNo  (None, 4, 4, 1024)           4096      ['conv4_block4_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block4_add (Add)      (None, 4, 4, 1024)           0         ['conv4_block3_out[0][0]',    
                                                                     'conv4_block4_3_bn[0][0]']   
                                                                                                  
 conv4_block4_out (Activati  (None, 4, 4, 1024)           0         ['conv4_block4_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv4_block5_1_conv (Conv2  (None, 4, 4, 256)            262400    ['conv4_block4_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv4_block5_1_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block5_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block5_1_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block5_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block5_2_conv (Conv2  (None, 4, 4, 256)            590080    ['conv4_block5_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block5_2_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block5_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block5_2_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block5_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block5_3_conv (Conv2  (None, 4, 4, 1024)           263168    ['conv4_block5_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block5_3_bn (BatchNo  (None, 4, 4, 1024)           4096      ['conv4_block5_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block5_add (Add)      (None, 4, 4, 1024)           0         ['conv4_block4_out[0][0]',    
                                                                     'conv4_block5_3_bn[0][0]']   
                                                                                                  
 conv4_block5_out (Activati  (None, 4, 4, 1024)           0         ['conv4_block5_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv4_block6_1_conv (Conv2  (None, 4, 4, 256)            262400    ['conv4_block5_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv4_block6_1_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block6_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block6_1_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block6_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block6_2_conv (Conv2  (None, 4, 4, 256)            590080    ['conv4_block6_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block6_2_bn (BatchNo  (None, 4, 4, 256)            1024      ['conv4_block6_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block6_2_relu (Activ  (None, 4, 4, 256)            0         ['conv4_block6_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv4_block6_3_conv (Conv2  (None, 4, 4, 1024)           263168    ['conv4_block6_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv4_block6_3_bn (BatchNo  (None, 4, 4, 1024)           4096      ['conv4_block6_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv4_block6_add (Add)      (None, 4, 4, 1024)           0         ['conv4_block5_out[0][0]',    
                                                                     'conv4_block6_3_bn[0][0]']   
                                                                                                  
 conv4_block6_out (Activati  (None, 4, 4, 1024)           0         ['conv4_block6_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv5_block1_1_conv (Conv2  (None, 2, 2, 512)            524800    ['conv4_block6_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv5_block1_1_bn (BatchNo  (None, 2, 2, 512)            2048      ['conv5_block1_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block1_1_relu (Activ  (None, 2, 2, 512)            0         ['conv5_block1_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv5_block1_2_conv (Conv2  (None, 2, 2, 512)            2359808   ['conv5_block1_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv5_block1_2_bn (BatchNo  (None, 2, 2, 512)            2048      ['conv5_block1_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block1_2_relu (Activ  (None, 2, 2, 512)            0         ['conv5_block1_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv5_block1_0_conv (Conv2  (None, 2, 2, 2048)           2099200   ['conv4_block6_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv5_block1_3_conv (Conv2  (None, 2, 2, 2048)           1050624   ['conv5_block1_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv5_block1_0_bn (BatchNo  (None, 2, 2, 2048)           8192      ['conv5_block1_0_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block1_3_bn (BatchNo  (None, 2, 2, 2048)           8192      ['conv5_block1_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block1_add (Add)      (None, 2, 2, 2048)           0         ['conv5_block1_0_bn[0][0]',   
                                                                     'conv5_block1_3_bn[0][0]']   
                                                                                                  
 conv5_block1_out (Activati  (None, 2, 2, 2048)           0         ['conv5_block1_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv5_block2_1_conv (Conv2  (None, 2, 2, 512)            1049088   ['conv5_block1_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv5_block2_1_bn (BatchNo  (None, 2, 2, 512)            2048      ['conv5_block2_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block2_1_relu (Activ  (None, 2, 2, 512)            0         ['conv5_block2_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv5_block2_2_conv (Conv2  (None, 2, 2, 512)            2359808   ['conv5_block2_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv5_block2_2_bn (BatchNo  (None, 2, 2, 512)            2048      ['conv5_block2_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block2_2_relu (Activ  (None, 2, 2, 512)            0         ['conv5_block2_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv5_block2_3_conv (Conv2  (None, 2, 2, 2048)           1050624   ['conv5_block2_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv5_block2_3_bn (BatchNo  (None, 2, 2, 2048)           8192      ['conv5_block2_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block2_add (Add)      (None, 2, 2, 2048)           0         ['conv5_block1_out[0][0]',    
                                                                     'conv5_block2_3_bn[0][0]']   
                                                                                                  
 conv5_block2_out (Activati  (None, 2, 2, 2048)           0         ['conv5_block2_add[0][0]']    
 on)                                                                                              
                                                                                                  
 conv5_block3_1_conv (Conv2  (None, 2, 2, 512)            1049088   ['conv5_block2_out[0][0]']    
 D)                                                                                               
                                                                                                  
 conv5_block3_1_bn (BatchNo  (None, 2, 2, 512)            2048      ['conv5_block3_1_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block3_1_relu (Activ  (None, 2, 2, 512)            0         ['conv5_block3_1_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv5_block3_2_conv (Conv2  (None, 2, 2, 512)            2359808   ['conv5_block3_1_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv5_block3_2_bn (BatchNo  (None, 2, 2, 512)            2048      ['conv5_block3_2_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block3_2_relu (Activ  (None, 2, 2, 512)            0         ['conv5_block3_2_bn[0][0]']   
 ation)                                                                                           
                                                                                                  
 conv5_block3_3_conv (Conv2  (None, 2, 2, 2048)           1050624   ['conv5_block3_2_relu[0][0]'] 
 D)                                                                                               
                                                                                                  
 conv5_block3_3_bn (BatchNo  (None, 2, 2, 2048)           8192      ['conv5_block3_3_conv[0][0]'] 
 rmalization)                                                                                     
                                                                                                  
 conv5_block3_add (Add)      (None, 2, 2, 2048)           0         ['conv5_block2_out[0][0]',    
                                                                     'conv5_block3_3_bn[0][0]']   
                                                                                                  
 conv5_block3_out (Activati  (None, 2, 2, 2048)           0         ['conv5_block3_add[0][0]']    
 on)                                                                                              
                                                                                                  
 flatten (Flatten)           (None, 8192)                 0         ['conv5_block3_out[0][0]']    
                                                                                                  
 dense (Dense)               (None, 4096)                 3355852   ['flatten[0][0]']             
                                                          8                                       
                                                                                                  
 dropout (Dropout)           (None, 4096)                 0         ['dense[0][0]']               
                                                                                                  
 dense_1 (Dense)             (None, 4096)                 1678131   ['dropout[0][0]']             
                                                          2                                       
                                                                                                  
 dropout_1 (Dropout)         (None, 4096)                 0         ['dense_1[0][0]']             
                                                                                                  
 dense_2 (Dense)             (None, 5)                    20485     ['dropout_1[0][0]']           
                                                                                                  
==================================================================================================
Total params: 73948037 (282.09 MB)
Trainable params: 73894917 (281.89 MB)
Non-trainable params: 53120 (207.50 KB)
__________________________________________________________________________________________________

Data augmentation using Keras' ImageDataGenerator class¶

Data augmentation is a technique used to artificially expand the size of a dataset by creating modified versions of images in the dataset. This helps improve the robustness and generalization ability of machine learning models, especially in tasks like image classification.¶

Ensure that all images fed into the model during training, validation, and testing have their pixel values normalized to the range [0, 1]¶

In [ ]:
# Expand the size of dataset with new transformed images from the original dataset using ImageDataGenerator.

train_datagen = image.ImageDataGenerator(zoom_range = 0.2, shear_range = 0.2 , rescale = 1./255 , horizontal_flip=True)

val_datagen = image.ImageDataGenerator(rescale = 1./255)
test_datagen = image.ImageDataGenerator(rescale = 1./255)
In [ ]:
# The train_data object is an instance of a Keras DirectoryIterator, which generates batches of data from the specified directory.
# The flow_from_directory method reads images from the specified directory and applies the transformations defined in the train_datagen object (such as augmentation and normalization).
# Resizes Images: All images are resized to 64x64 pixels.
# Batch Processing: Images are processed and yielded in batches of 100.
# Categorical Labels: The labels for the images are one-hot encoded.
# The flow_from_directory function is a powerful way to generate batches of tensor image data with real-time data augmentation. It is especially useful when having a large dataset organized into subdirectories by class. The function reads the images, applies the specified preprocessing steps, and yields them in batches for training the neural network.

train_data = train_datagen.flow_from_directory(directory= root_dir + "/train", target_size=(64, 64), batch_size=100, class_mode = 'categorical')
Found 2832 images belonging to 5 classes.
In [ ]:
train_data.class_indices
Out[ ]:
{'Dyskeratotic': 0,
 'Koilocytotic': 1,
 'Metaplastic': 2,
 'Parabasal': 3,
 'Superficial-Intermediate': 4}
In [ ]:
val_data = val_datagen.flow_from_directory(directory= root_dir + "/val", target_size=(64, 64), batch_size=100, class_mode = 'categorical')
Found 608 images belonging to 5 classes.
In [ ]:
test_data = test_datagen.flow_from_directory(directory= root_dir + "/test", target_size=(64, 64), batch_size=100, class_mode = 'categorical')
Found 609 images belonging to 5 classes.
In [ ]:
# Adding Model check point Callback
# This callback is used during the training process to save the model weights. It monitors the validation accuracy and saves the model only if there is an improvement.

from tensorflow.keras.callbacks import ModelCheckpoint

# Define the full filepath for saving the best model

filepath = os.path.join(root_dir, "cervical_cancer_best_model_ResNet-50.hdf5")

# Adding Model Checkpoint Callback

mc = ModelCheckpoint(
    filepath=filepath,
    monitor='val_accuracy',
    verbose=1, # When set to 1, the callback will print messages when the model is being saved.
    save_best_only=True, # When set to True, the callback saves the model only when the monitored metric (val_accuracy) improves. This ensures that only the best model, in terms of validation accuracy, is saved.
    mode='auto'
)
call_back = [mc]
In [ ]:
# Fitting the Model

# steps_per_epoch = 28 (how many batches on one epoch)
# This parameter defines the number of batches of samples to be used in each epoch.
# Essentially, it is the number of times the model will be updated in one epoch.
# Since the batch size is 100, then 28 steps per epoch mean that the model will see 2800 (28 * 100) samples in one epoch.

# validation_steps=6
# This parameter defines the number of batches of samples to be used in each validation epoch.
# This means that in each epoch, the model will see 600 (6 * 100) samples from the validation set.

cnn = model.fit(train_data,
                  steps_per_epoch= 28,
                  epochs= 64,
                  validation_data= val_data,
                  validation_steps= 6,
                  callbacks = call_back )
Epoch 1/64
28/28 [==============================] - ETA: 0s - loss: 3.2650 - accuracy: 0.6856 
Epoch 1: val_accuracy improved from -inf to 0.20333, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 623s 21s/step - loss: 3.2650 - accuracy: 0.6856 - val_loss: 134.0110 - val_accuracy: 0.2033
Epoch 2/64
28/28 [==============================] - ETA: 0s - loss: 0.4542 - accuracy: 0.8712
Epoch 2: val_accuracy did not improve from 0.20333
28/28 [==============================] - 14s 413ms/step - loss: 0.4542 - accuracy: 0.8712 - val_loss: 4.7824 - val_accuracy: 0.1383
Epoch 3/64
28/28 [==============================] - ETA: 0s - loss: 0.4179 - accuracy: 0.8909
Epoch 3: val_accuracy did not improve from 0.20333
28/28 [==============================] - 11s 392ms/step - loss: 0.4179 - accuracy: 0.8909 - val_loss: 3.3701 - val_accuracy: 0.1900
Epoch 4/64
28/28 [==============================] - ETA: 0s - loss: 0.5150 - accuracy: 0.8459
Epoch 4: val_accuracy did not improve from 0.20333
28/28 [==============================] - 11s 381ms/step - loss: 0.5150 - accuracy: 0.8459 - val_loss: 18.4217 - val_accuracy: 0.1933
Epoch 5/64
28/28 [==============================] - ETA: 0s - loss: 0.3432 - accuracy: 0.8982
Epoch 5: val_accuracy did not improve from 0.20333
28/28 [==============================] - 11s 392ms/step - loss: 0.3432 - accuracy: 0.8982 - val_loss: 16.1079 - val_accuracy: 0.1950
Epoch 6/64
28/28 [==============================] - ETA: 0s - loss: 0.2432 - accuracy: 0.9154
Epoch 6: val_accuracy did not improve from 0.20333
28/28 [==============================] - 11s 391ms/step - loss: 0.2432 - accuracy: 0.9154 - val_loss: 16.7747 - val_accuracy: 0.1933
Epoch 7/64
28/28 [==============================] - ETA: 0s - loss: 0.2177 - accuracy: 0.9286
Epoch 7: val_accuracy did not improve from 0.20333
28/28 [==============================] - 11s 385ms/step - loss: 0.2177 - accuracy: 0.9286 - val_loss: 7.3999 - val_accuracy: 0.1900
Epoch 8/64
28/28 [==============================] - ETA: 0s - loss: 0.1532 - accuracy: 0.9510
Epoch 8: val_accuracy improved from 0.20333 to 0.24167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 18s 660ms/step - loss: 0.1532 - accuracy: 0.9510 - val_loss: 1.7790 - val_accuracy: 0.2417
Epoch 9/64
28/28 [==============================] - ETA: 0s - loss: 0.1991 - accuracy: 0.9319
Epoch 9: val_accuracy did not improve from 0.24167
28/28 [==============================] - 13s 424ms/step - loss: 0.1991 - accuracy: 0.9319 - val_loss: 7.3242 - val_accuracy: 0.1900
Epoch 10/64
28/28 [==============================] - ETA: 0s - loss: 0.1389 - accuracy: 0.9553
Epoch 10: val_accuracy did not improve from 0.24167
28/28 [==============================] - 11s 383ms/step - loss: 0.1389 - accuracy: 0.9553 - val_loss: 2.9678 - val_accuracy: 0.1950
Epoch 11/64
28/28 [==============================] - ETA: 0s - loss: 0.1643 - accuracy: 0.9520
Epoch 11: val_accuracy did not improve from 0.24167
28/28 [==============================] - 11s 372ms/step - loss: 0.1643 - accuracy: 0.9520 - val_loss: 2.1639 - val_accuracy: 0.1950
Epoch 12/64
28/28 [==============================] - ETA: 0s - loss: 0.1349 - accuracy: 0.9550
Epoch 12: val_accuracy did not improve from 0.24167
28/28 [==============================] - 11s 386ms/step - loss: 0.1349 - accuracy: 0.9550 - val_loss: 2.0321 - val_accuracy: 0.1900
Epoch 13/64
28/28 [==============================] - ETA: 0s - loss: 0.1199 - accuracy: 0.9597
Epoch 13: val_accuracy did not improve from 0.24167
28/28 [==============================] - 11s 382ms/step - loss: 0.1199 - accuracy: 0.9597 - val_loss: 1.7080 - val_accuracy: 0.2033
Epoch 14/64
28/28 [==============================] - ETA: 0s - loss: 0.1202 - accuracy: 0.9594
Epoch 14: val_accuracy improved from 0.24167 to 0.28500, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 19s 696ms/step - loss: 0.1202 - accuracy: 0.9594 - val_loss: 1.6064 - val_accuracy: 0.2850
Epoch 15/64
28/28 [==============================] - ETA: 0s - loss: 0.0932 - accuracy: 0.9740
Epoch 15: val_accuracy improved from 0.28500 to 0.30833, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 17s 552ms/step - loss: 0.0932 - accuracy: 0.9740 - val_loss: 1.6012 - val_accuracy: 0.3083
Epoch 16/64
28/28 [==============================] - ETA: 0s - loss: 0.0912 - accuracy: 0.9674
Epoch 16: val_accuracy improved from 0.30833 to 0.33167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 16s 521ms/step - loss: 0.0912 - accuracy: 0.9674 - val_loss: 1.5446 - val_accuracy: 0.3317
Epoch 17/64
28/28 [==============================] - ETA: 0s - loss: 0.1173 - accuracy: 0.9632
Epoch 17: val_accuracy did not improve from 0.33167
28/28 [==============================] - 13s 414ms/step - loss: 0.1173 - accuracy: 0.9632 - val_loss: 1.6697 - val_accuracy: 0.2267
Epoch 18/64
28/28 [==============================] - ETA: 0s - loss: 0.1021 - accuracy: 0.9656
Epoch 18: val_accuracy did not improve from 0.33167
28/28 [==============================] - 11s 395ms/step - loss: 0.1021 - accuracy: 0.9656 - val_loss: 1.9251 - val_accuracy: 0.3267
Epoch 19/64
28/28 [==============================] - ETA: 0s - loss: 0.1108 - accuracy: 0.9700
Epoch 19: val_accuracy did not improve from 0.33167
28/28 [==============================] - 11s 391ms/step - loss: 0.1108 - accuracy: 0.9700 - val_loss: 1.5243 - val_accuracy: 0.3067
Epoch 20/64
28/28 [==============================] - ETA: 0s - loss: 0.3618 - accuracy: 0.9337
Epoch 20: val_accuracy did not improve from 0.33167
28/28 [==============================] - 11s 389ms/step - loss: 0.3618 - accuracy: 0.9337 - val_loss: 7.2511 - val_accuracy: 0.1933
Epoch 21/64
28/28 [==============================] - ETA: 0s - loss: 0.2331 - accuracy: 0.9436
Epoch 21: val_accuracy did not improve from 0.33167
28/28 [==============================] - 11s 378ms/step - loss: 0.2331 - accuracy: 0.9436 - val_loss: 2.1379 - val_accuracy: 0.3000
Epoch 22/64
28/28 [==============================] - ETA: 0s - loss: 0.4373 - accuracy: 0.8993
Epoch 22: val_accuracy did not improve from 0.33167
28/28 [==============================] - 11s 405ms/step - loss: 0.4373 - accuracy: 0.8993 - val_loss: 2.1124 - val_accuracy: 0.2233
Epoch 23/64
28/28 [==============================] - ETA: 0s - loss: 0.3737 - accuracy: 0.9253
Epoch 23: val_accuracy did not improve from 0.33167
28/28 [==============================] - 11s 400ms/step - loss: 0.3737 - accuracy: 0.9253 - val_loss: 2.4645 - val_accuracy: 0.2667
Epoch 24/64
28/28 [==============================] - ETA: 0s - loss: 0.2282 - accuracy: 0.9268
Epoch 24: val_accuracy did not improve from 0.33167
28/28 [==============================] - 11s 380ms/step - loss: 0.2282 - accuracy: 0.9268 - val_loss: 7.9874 - val_accuracy: 0.1950
Epoch 25/64
28/28 [==============================] - ETA: 0s - loss: 0.1600 - accuracy: 0.9458
Epoch 25: val_accuracy did not improve from 0.33167
28/28 [==============================] - 11s 378ms/step - loss: 0.1600 - accuracy: 0.9458 - val_loss: 3.7202 - val_accuracy: 0.2550
Epoch 26/64
28/28 [==============================] - ETA: 0s - loss: 0.1278 - accuracy: 0.9619
Epoch 26: val_accuracy improved from 0.33167 to 0.46667, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 17s 626ms/step - loss: 0.1278 - accuracy: 0.9619 - val_loss: 1.3180 - val_accuracy: 0.4667
Epoch 27/64
28/28 [==============================] - ETA: 0s - loss: 0.1052 - accuracy: 0.9674
Epoch 27: val_accuracy improved from 0.46667 to 0.60833, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 17s 541ms/step - loss: 0.1052 - accuracy: 0.9674 - val_loss: 0.9041 - val_accuracy: 0.6083
Epoch 28/64
28/28 [==============================] - ETA: 0s - loss: 0.0964 - accuracy: 0.9652
Epoch 28: val_accuracy improved from 0.60833 to 0.62667, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 17s 524ms/step - loss: 0.0964 - accuracy: 0.9652 - val_loss: 1.1273 - val_accuracy: 0.6267
Epoch 29/64
28/28 [==============================] - ETA: 0s - loss: 0.0857 - accuracy: 0.9693
Epoch 29: val_accuracy did not improve from 0.62667
28/28 [==============================] - 13s 393ms/step - loss: 0.0857 - accuracy: 0.9693 - val_loss: 1.5595 - val_accuracy: 0.5433
Epoch 30/64
28/28 [==============================] - ETA: 0s - loss: 0.0949 - accuracy: 0.9722
Epoch 30: val_accuracy improved from 0.62667 to 0.77500, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 18s 659ms/step - loss: 0.0949 - accuracy: 0.9722 - val_loss: 0.6040 - val_accuracy: 0.7750
Epoch 31/64
28/28 [==============================] - ETA: 0s - loss: 0.0831 - accuracy: 0.9733
Epoch 31: val_accuracy did not improve from 0.77500
28/28 [==============================] - 13s 398ms/step - loss: 0.0831 - accuracy: 0.9733 - val_loss: 1.1937 - val_accuracy: 0.7233
Epoch 32/64
28/28 [==============================] - ETA: 0s - loss: 0.0702 - accuracy: 0.9791
Epoch 32: val_accuracy did not improve from 0.77500
28/28 [==============================] - 11s 387ms/step - loss: 0.0702 - accuracy: 0.9791 - val_loss: 0.8551 - val_accuracy: 0.7317
Epoch 33/64
28/28 [==============================] - ETA: 0s - loss: 0.0992 - accuracy: 0.9689
Epoch 33: val_accuracy improved from 0.77500 to 0.82833, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 18s 636ms/step - loss: 0.0992 - accuracy: 0.9689 - val_loss: 0.4774 - val_accuracy: 0.8283
Epoch 34/64
28/28 [==============================] - ETA: 0s - loss: 0.0823 - accuracy: 0.9711
Epoch 34: val_accuracy improved from 0.82833 to 0.86333, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 17s 518ms/step - loss: 0.0823 - accuracy: 0.9711 - val_loss: 0.5064 - val_accuracy: 0.8633
Epoch 35/64
28/28 [==============================] - ETA: 0s - loss: 0.1023 - accuracy: 0.9704
Epoch 35: val_accuracy improved from 0.86333 to 0.86667, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 17s 524ms/step - loss: 0.1023 - accuracy: 0.9704 - val_loss: 0.4030 - val_accuracy: 0.8667
Epoch 36/64
28/28 [==============================] - ETA: 0s - loss: 0.1072 - accuracy: 0.9788
Epoch 36: val_accuracy improved from 0.86667 to 0.87000, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 16s 518ms/step - loss: 0.1072 - accuracy: 0.9788 - val_loss: 0.4046 - val_accuracy: 0.8700
Epoch 37/64
28/28 [==============================] - ETA: 0s - loss: 0.0698 - accuracy: 0.9780
Epoch 37: val_accuracy did not improve from 0.87000
28/28 [==============================] - 13s 396ms/step - loss: 0.0698 - accuracy: 0.9780 - val_loss: 1.3041 - val_accuracy: 0.7350
Epoch 38/64
28/28 [==============================] - ETA: 0s - loss: 0.0576 - accuracy: 0.9813
Epoch 38: val_accuracy improved from 0.87000 to 0.89167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 18s 642ms/step - loss: 0.0576 - accuracy: 0.9813 - val_loss: 0.4004 - val_accuracy: 0.8917
Epoch 39/64
28/28 [==============================] - ETA: 0s - loss: 0.0878 - accuracy: 0.9725
Epoch 39: val_accuracy improved from 0.89167 to 0.89667, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 17s 551ms/step - loss: 0.0878 - accuracy: 0.9725 - val_loss: 0.3704 - val_accuracy: 0.8967
Epoch 40/64
28/28 [==============================] - ETA: 0s - loss: 0.0606 - accuracy: 0.9802
Epoch 40: val_accuracy improved from 0.89667 to 0.92000, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 16s 527ms/step - loss: 0.0606 - accuracy: 0.9802 - val_loss: 0.2797 - val_accuracy: 0.9200
Epoch 41/64
28/28 [==============================] - ETA: 0s - loss: 0.0614 - accuracy: 0.9802
Epoch 41: val_accuracy did not improve from 0.92000
28/28 [==============================] - 13s 401ms/step - loss: 0.0614 - accuracy: 0.9802 - val_loss: 0.5112 - val_accuracy: 0.8650
Epoch 42/64
28/28 [==============================] - ETA: 0s - loss: 0.0571 - accuracy: 0.9810
Epoch 42: val_accuracy did not improve from 0.92000
28/28 [==============================] - 11s 393ms/step - loss: 0.0571 - accuracy: 0.9810 - val_loss: 0.3114 - val_accuracy: 0.9133
Epoch 43/64
28/28 [==============================] - ETA: 0s - loss: 0.0332 - accuracy: 0.9879
Epoch 43: val_accuracy improved from 0.92000 to 0.92167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 19s 697ms/step - loss: 0.0332 - accuracy: 0.9879 - val_loss: 0.3456 - val_accuracy: 0.9217
Epoch 44/64
28/28 [==============================] - ETA: 0s - loss: 0.0381 - accuracy: 0.9876
Epoch 44: val_accuracy improved from 0.92167 to 0.92667, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 16s 511ms/step - loss: 0.0381 - accuracy: 0.9876 - val_loss: 0.3255 - val_accuracy: 0.9267
Epoch 45/64
28/28 [==============================] - ETA: 0s - loss: 0.0835 - accuracy: 0.9747
Epoch 45: val_accuracy did not improve from 0.92667
28/28 [==============================] - 13s 391ms/step - loss: 0.0835 - accuracy: 0.9747 - val_loss: 0.3059 - val_accuracy: 0.9267
Epoch 46/64
28/28 [==============================] - ETA: 0s - loss: 0.0517 - accuracy: 0.9835
Epoch 46: val_accuracy improved from 0.92667 to 0.94167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 18s 638ms/step - loss: 0.0517 - accuracy: 0.9835 - val_loss: 0.2137 - val_accuracy: 0.9417
Epoch 47/64
28/28 [==============================] - ETA: 0s - loss: 0.0469 - accuracy: 0.9865
Epoch 47: val_accuracy did not improve from 0.94167
28/28 [==============================] - 13s 396ms/step - loss: 0.0469 - accuracy: 0.9865 - val_loss: 0.2654 - val_accuracy: 0.9267
Epoch 48/64
28/28 [==============================] - ETA: 0s - loss: 0.0672 - accuracy: 0.9817
Epoch 48: val_accuracy did not improve from 0.94167
28/28 [==============================] - 11s 400ms/step - loss: 0.0672 - accuracy: 0.9817 - val_loss: 0.3675 - val_accuracy: 0.9000
Epoch 49/64
28/28 [==============================] - ETA: 0s - loss: 0.0505 - accuracy: 0.9868
Epoch 49: val_accuracy improved from 0.94167 to 0.94667, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 19s 686ms/step - loss: 0.0505 - accuracy: 0.9868 - val_loss: 0.2346 - val_accuracy: 0.9467
Epoch 50/64
28/28 [==============================] - ETA: 0s - loss: 0.0715 - accuracy: 0.9810
Epoch 50: val_accuracy did not improve from 0.94667
28/28 [==============================] - 13s 399ms/step - loss: 0.0715 - accuracy: 0.9810 - val_loss: 23.1429 - val_accuracy: 0.7333
Epoch 51/64
28/28 [==============================] - ETA: 0s - loss: 0.0664 - accuracy: 0.9769
Epoch 51: val_accuracy did not improve from 0.94667
28/28 [==============================] - 11s 385ms/step - loss: 0.0664 - accuracy: 0.9769 - val_loss: 0.8457 - val_accuracy: 0.8517
Epoch 52/64
28/28 [==============================] - ETA: 0s - loss: 0.0682 - accuracy: 0.9799
Epoch 52: val_accuracy did not improve from 0.94667
28/28 [==============================] - 11s 382ms/step - loss: 0.0682 - accuracy: 0.9799 - val_loss: 0.2601 - val_accuracy: 0.9183
Epoch 53/64
28/28 [==============================] - ETA: 0s - loss: 0.0496 - accuracy: 0.9843
Epoch 53: val_accuracy did not improve from 0.94667
28/28 [==============================] - 11s 374ms/step - loss: 0.0496 - accuracy: 0.9843 - val_loss: 0.2636 - val_accuracy: 0.9267
Epoch 54/64
28/28 [==============================] - ETA: 0s - loss: 0.0440 - accuracy: 0.9865
Epoch 54: val_accuracy did not improve from 0.94667
28/28 [==============================] - 11s 379ms/step - loss: 0.0440 - accuracy: 0.9865 - val_loss: 0.2385 - val_accuracy: 0.9417
Epoch 55/64
28/28 [==============================] - ETA: 0s - loss: 0.0246 - accuracy: 0.9918
Epoch 55: val_accuracy improved from 0.94667 to 0.94833, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_ResNet-50.hdf5
28/28 [==============================] - 18s 636ms/step - loss: 0.0246 - accuracy: 0.9918 - val_loss: 0.2595 - val_accuracy: 0.9483
Epoch 56/64
28/28 [==============================] - ETA: 0s - loss: 0.2834 - accuracy: 0.9883
Epoch 56: val_accuracy did not improve from 0.94833
28/28 [==============================] - 13s 401ms/step - loss: 0.2834 - accuracy: 0.9883 - val_loss: 6.6538 - val_accuracy: 0.3650
Epoch 57/64
28/28 [==============================] - ETA: 0s - loss: 0.4093 - accuracy: 0.8821
Epoch 57: val_accuracy did not improve from 0.94833
28/28 [==============================] - 11s 388ms/step - loss: 0.4093 - accuracy: 0.8821 - val_loss: 7460.0474 - val_accuracy: 0.1983
Epoch 58/64
28/28 [==============================] - ETA: 0s - loss: 0.4295 - accuracy: 0.9074
Epoch 58: val_accuracy did not improve from 0.94833
28/28 [==============================] - 11s 379ms/step - loss: 0.4295 - accuracy: 0.9074 - val_loss: 101390.4141 - val_accuracy: 0.3233
Epoch 59/64
28/28 [==============================] - ETA: 0s - loss: 0.6900 - accuracy: 0.8939
Epoch 59: val_accuracy did not improve from 0.94833
28/28 [==============================] - 11s 381ms/step - loss: 0.6900 - accuracy: 0.8939 - val_loss: 499.5102 - val_accuracy: 0.3667
Epoch 60/64
28/28 [==============================] - ETA: 0s - loss: 0.3896 - accuracy: 0.9198
Epoch 60: val_accuracy did not improve from 0.94833
28/28 [==============================] - 11s 376ms/step - loss: 0.3896 - accuracy: 0.9198 - val_loss: 1086.9932 - val_accuracy: 0.3700
Epoch 61/64
28/28 [==============================] - ETA: 0s - loss: 0.3533 - accuracy: 0.9129
Epoch 61: val_accuracy did not improve from 0.94833
28/28 [==============================] - 11s 382ms/step - loss: 0.3533 - accuracy: 0.9129 - val_loss: 856.5764 - val_accuracy: 0.3833
Epoch 62/64
28/28 [==============================] - ETA: 0s - loss: 0.2202 - accuracy: 0.9279
Epoch 62: val_accuracy did not improve from 0.94833
28/28 [==============================] - 11s 378ms/step - loss: 0.2202 - accuracy: 0.9279 - val_loss: 64.6030 - val_accuracy: 0.4300
Epoch 63/64
28/28 [==============================] - ETA: 0s - loss: 0.1676 - accuracy: 0.9488
Epoch 63: val_accuracy did not improve from 0.94833
28/28 [==============================] - 11s 374ms/step - loss: 0.1676 - accuracy: 0.9488 - val_loss: 14.8945 - val_accuracy: 0.5650
Epoch 64/64
28/28 [==============================] - ETA: 0s - loss: 0.1218 - accuracy: 0.9586
Epoch 64: val_accuracy did not improve from 0.94833
28/28 [==============================] - 10s 362ms/step - loss: 0.1218 - accuracy: 0.9586 - val_loss: 6.0849 - val_accuracy: 0.6933
In [ ]:
# Loading the Best Fit Model
model = load_model(root_dir + "/cervical_cancer_best_model_ResNet-50.hdf5")

Model Accuracy¶

In [ ]:
# Checking the Accuracy of the Model

accuracy = model.evaluate_generator(generator= test_data)[1]
print(f"The accuracy of your ResNet-50 model is = {accuracy*100} %")
The accuracy of your ResNet-50 model is = 93.43185424804688 %
In [ ]:
# [1]: This accesses the second element of the returned list, which corresponds to the accuracy of the model. The first element ([0]) is the loss.
In [ ]:
h =  cnn.history;
h.keys();
In [ ]:
# Ploting Accuracy In Training Set & Validation Set

plt.plot(h['accuracy'])
plt.plot(h['val_accuracy'] , c = "red")
plt.title("acc vs v-acc")
plt.show()
In [ ]:
# Ploting Loss In Training Set & Validation Set

plt.plot(h['loss'])
plt.plot(h['val_loss'] , c = "red")
plt.title("loss vs v-loss")
plt.show()
In [ ]:
def cancerPrediction(path):
    classes_dir = ["Dyskeratotic","Koilocytotic","Metaplastic","Parabasal","Superficial-Intermediate"]
    # Loading Image
    img = image.load_img(path, target_size=(64,64))
    # Normalizing Image
    norm_img = image.img_to_array(img)/255
    # Converting Image to Numpy Array
    input_arr_img = np.array([norm_img])
    # Getting Predictions
    pred = np.argmax(model.predict(input_arr_img))
    # Printing Model Prediction
    print(classes_dir[pred])

path = "/content/drive/Shareddrives/Computer Vision Final Project/im_Dyskeratotic/im_Dyskeratotic/CROPPED/002_04.bmp"
cancerPrediction(path)
1/1 [==============================] - 2s 2s/step
Dyskeratotic
In [ ]:
 

For each epoch, for all the 2832 training images, they go through data augmentation? so basically the first epoch's images may look different than second epoch's images?¶

image.png

image.png

In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]:
 

With VGG Model

In [ ]:
from tensorflow.keras.applications import VGG16
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.layers import Dense, Flatten
from tensorflow.keras.preprocessing import image
from tensorflow.keras.callbacks import ModelCheckpoint
import os

# Load the VGG16 model with pre-trained ImageNet weights, excluding the top classification layer
base_model = VGG16(weights='imagenet', include_top=False, input_shape=(64, 64, 3))

# Add custom classification layers on top of the VGG16 base model
x = base_model.output
x = Flatten()(x)
x = Dense(1024, activation='relu')(x)
predictions = Dense(5, activation='softmax')(x)  # 5 classes

# Create the full model
model = Model(inputs=base_model.input, outputs=predictions)

# Compile the model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

model.summary()
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/vgg16/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5
58889256/58889256 [==============================] - 0s 0us/step
Model: "model"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 input_1 (InputLayer)        [(None, 64, 64, 3)]       0         
                                                                 
 block1_conv1 (Conv2D)       (None, 64, 64, 64)        1792      
                                                                 
 block1_conv2 (Conv2D)       (None, 64, 64, 64)        36928     
                                                                 
 block1_pool (MaxPooling2D)  (None, 32, 32, 64)        0         
                                                                 
 block2_conv1 (Conv2D)       (None, 32, 32, 128)       73856     
                                                                 
 block2_conv2 (Conv2D)       (None, 32, 32, 128)       147584    
                                                                 
 block2_pool (MaxPooling2D)  (None, 16, 16, 128)       0         
                                                                 
 block3_conv1 (Conv2D)       (None, 16, 16, 256)       295168    
                                                                 
 block3_conv2 (Conv2D)       (None, 16, 16, 256)       590080    
                                                                 
 block3_conv3 (Conv2D)       (None, 16, 16, 256)       590080    
                                                                 
 block3_pool (MaxPooling2D)  (None, 8, 8, 256)         0         
                                                                 
 block4_conv1 (Conv2D)       (None, 8, 8, 512)         1180160   
                                                                 
 block4_conv2 (Conv2D)       (None, 8, 8, 512)         2359808   
                                                                 
 block4_conv3 (Conv2D)       (None, 8, 8, 512)         2359808   
                                                                 
 block4_pool (MaxPooling2D)  (None, 4, 4, 512)         0         
                                                                 
 block5_conv1 (Conv2D)       (None, 4, 4, 512)         2359808   
                                                                 
 block5_conv2 (Conv2D)       (None, 4, 4, 512)         2359808   
                                                                 
 block5_conv3 (Conv2D)       (None, 4, 4, 512)         2359808   
                                                                 
 block5_pool (MaxPooling2D)  (None, 2, 2, 512)         0         
                                                                 
 flatten (Flatten)           (None, 2048)              0         
                                                                 
 dense (Dense)               (None, 1024)              2098176   
                                                                 
 dense_1 (Dense)             (None, 5)                 5125      
                                                                 
=================================================================
Total params: 16817989 (64.16 MB)
Trainable params: 16817989 (64.16 MB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
In [ ]:
# Expand the size of dataset with new transformed images from the original dataset using ImageDataGenerator
train_datagen = image.ImageDataGenerator(zoom_range=0.2, shear_range=0.2, rescale=1./255, horizontal_flip=True)
val_datagen = image.ImageDataGenerator(rescale=1./255)
test_datagen = image.ImageDataGenerator(rescale=1./255)

# Directory iterators
train_data = train_datagen.flow_from_directory(directory=root_dir + "/train", target_size=(64, 64), batch_size=100, class_mode='categorical')
val_data = val_datagen.flow_from_directory(directory=root_dir + "/val", target_size=(64, 64), batch_size=100, class_mode='categorical')
test_data = test_datagen.flow_from_directory(directory=root_dir + "/test", target_size=(64, 64), batch_size=100, class_mode='categorical')

# Adding Model Checkpoint Callback
filepath = os.path.join(root_dir, "cervical_cancer_best_model_VGG16.hdf5")

mc = ModelCheckpoint(
    filepath=filepath,
    monitor='val_accuracy',
    verbose=1,
    save_best_only=True,
    mode='auto'
)
call_back = [mc]
Found 2832 images belonging to 5 classes.
Found 608 images belonging to 5 classes.
Found 609 images belonging to 5 classes.
In [ ]:
# Fitting the Model
cnn = model.fit(train_data,
                steps_per_epoch=28,
                epochs=64,
                validation_data=val_data,
                validation_steps=6,
                callbacks=call_back)
Epoch 1/64
28/28 [==============================] - ETA: 0s - loss: 2.3821 - accuracy: 0.2185 
Epoch 1: val_accuracy improved from -inf to 0.25667, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 670s 24s/step - loss: 2.3821 - accuracy: 0.2185 - val_loss: 1.5894 - val_accuracy: 0.2567
Epoch 2/64
28/28 [==============================] - ETA: 0s - loss: 1.5034 - accuracy: 0.3177
Epoch 2: val_accuracy improved from 0.25667 to 0.36000, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 135s 5s/step - loss: 1.5034 - accuracy: 0.3177 - val_loss: 1.3370 - val_accuracy: 0.3600
Epoch 3/64
28/28 [==============================] - ETA: 0s - loss: 1.2883 - accuracy: 0.3957
Epoch 3: val_accuracy improved from 0.36000 to 0.42833, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 134s 5s/step - loss: 1.2883 - accuracy: 0.3957 - val_loss: 1.1960 - val_accuracy: 0.4283
Epoch 4/64
28/28 [==============================] - ETA: 0s - loss: 1.2143 - accuracy: 0.4400
Epoch 4: val_accuracy did not improve from 0.42833
28/28 [==============================] - 133s 5s/step - loss: 1.2143 - accuracy: 0.4400 - val_loss: 1.2427 - val_accuracy: 0.4200
Epoch 5/64
28/28 [==============================] - ETA: 0s - loss: 1.1178 - accuracy: 0.4960
Epoch 5: val_accuracy improved from 0.42833 to 0.55167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 136s 5s/step - loss: 1.1178 - accuracy: 0.4960 - val_loss: 1.0255 - val_accuracy: 0.5517
Epoch 6/64
28/28 [==============================] - ETA: 0s - loss: 1.0255 - accuracy: 0.5410
Epoch 6: val_accuracy did not improve from 0.55167
28/28 [==============================] - 134s 5s/step - loss: 1.0255 - accuracy: 0.5410 - val_loss: 1.0654 - val_accuracy: 0.5133
Epoch 7/64
28/28 [==============================] - ETA: 0s - loss: 1.0430 - accuracy: 0.5421
Epoch 7: val_accuracy improved from 0.55167 to 0.56333, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 135s 5s/step - loss: 1.0430 - accuracy: 0.5421 - val_loss: 1.0570 - val_accuracy: 0.5633
Epoch 8/64
28/28 [==============================] - ETA: 0s - loss: 0.9483 - accuracy: 0.6196
Epoch 8: val_accuracy improved from 0.56333 to 0.75667, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 139s 5s/step - loss: 0.9483 - accuracy: 0.6196 - val_loss: 0.8011 - val_accuracy: 0.7567
Epoch 9/64
28/28 [==============================] - ETA: 0s - loss: 0.8613 - accuracy: 0.6955
Epoch 9: val_accuracy improved from 0.75667 to 0.77167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 137s 5s/step - loss: 0.8613 - accuracy: 0.6955 - val_loss: 0.7517 - val_accuracy: 0.7717
Epoch 10/64
28/28 [==============================] - ETA: 0s - loss: 0.7202 - accuracy: 0.7617
Epoch 10: val_accuracy improved from 0.77167 to 0.80000, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 136s 5s/step - loss: 0.7202 - accuracy: 0.7617 - val_loss: 0.7011 - val_accuracy: 0.8000
Epoch 11/64
28/28 [==============================] - ETA: 0s - loss: 0.6575 - accuracy: 0.7866
Epoch 11: val_accuracy improved from 0.80000 to 0.82167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 137s 5s/step - loss: 0.6575 - accuracy: 0.7866 - val_loss: 0.5587 - val_accuracy: 0.8217
Epoch 12/64
28/28 [==============================] - ETA: 0s - loss: 0.6129 - accuracy: 0.8067
Epoch 12: val_accuracy did not improve from 0.82167
28/28 [==============================] - 134s 5s/step - loss: 0.6129 - accuracy: 0.8067 - val_loss: 0.6474 - val_accuracy: 0.7933
Epoch 13/64
28/28 [==============================] - ETA: 0s - loss: 0.5594 - accuracy: 0.8287
Epoch 13: val_accuracy did not improve from 0.82167
28/28 [==============================] - 134s 5s/step - loss: 0.5594 - accuracy: 0.8287 - val_loss: 0.5744 - val_accuracy: 0.8133
Epoch 14/64
28/28 [==============================] - ETA: 0s - loss: 0.6344 - accuracy: 0.7833
Epoch 14: val_accuracy improved from 0.82167 to 0.83667, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 136s 5s/step - loss: 0.6344 - accuracy: 0.7833 - val_loss: 0.5330 - val_accuracy: 0.8367
Epoch 15/64
28/28 [==============================] - ETA: 0s - loss: 0.5724 - accuracy: 0.8104
Epoch 15: val_accuracy did not improve from 0.83667
28/28 [==============================] - 133s 5s/step - loss: 0.5724 - accuracy: 0.8104 - val_loss: 0.5153 - val_accuracy: 0.8350
Epoch 16/64
28/28 [==============================] - ETA: 0s - loss: 0.4788 - accuracy: 0.8349
Epoch 16: val_accuracy did not improve from 0.83667
28/28 [==============================] - 132s 5s/step - loss: 0.4788 - accuracy: 0.8349 - val_loss: 0.5560 - val_accuracy: 0.8083
Epoch 17/64
28/28 [==============================] - ETA: 0s - loss: 0.4577 - accuracy: 0.8455
Epoch 17: val_accuracy improved from 0.83667 to 0.86333, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 134s 5s/step - loss: 0.4577 - accuracy: 0.8455 - val_loss: 0.4236 - val_accuracy: 0.8633
Epoch 18/64
28/28 [==============================] - ETA: 0s - loss: 0.3968 - accuracy: 0.8682
Epoch 18: val_accuracy improved from 0.86333 to 0.86500, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 134s 5s/step - loss: 0.3968 - accuracy: 0.8682 - val_loss: 0.3867 - val_accuracy: 0.8650
Epoch 19/64
28/28 [==============================] - ETA: 0s - loss: 0.3698 - accuracy: 0.8748
Epoch 19: val_accuracy improved from 0.86500 to 0.87833, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 134s 5s/step - loss: 0.3698 - accuracy: 0.8748 - val_loss: 0.3858 - val_accuracy: 0.8783
Epoch 20/64
28/28 [==============================] - ETA: 0s - loss: 0.3805 - accuracy: 0.8712
Epoch 20: val_accuracy did not improve from 0.87833
28/28 [==============================] - 131s 5s/step - loss: 0.3805 - accuracy: 0.8712 - val_loss: 0.4452 - val_accuracy: 0.8583
Epoch 21/64
28/28 [==============================] - ETA: 0s - loss: 0.3868 - accuracy: 0.8642
Epoch 21: val_accuracy did not improve from 0.87833
28/28 [==============================] - 131s 5s/step - loss: 0.3868 - accuracy: 0.8642 - val_loss: 0.5524 - val_accuracy: 0.8300
Epoch 22/64
28/28 [==============================] - ETA: 0s - loss: 0.3459 - accuracy: 0.8777
Epoch 22: val_accuracy improved from 0.87833 to 0.89000, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 132s 5s/step - loss: 0.3459 - accuracy: 0.8777 - val_loss: 0.3197 - val_accuracy: 0.8900
Epoch 23/64
28/28 [==============================] - ETA: 0s - loss: 0.3567 - accuracy: 0.8836
Epoch 23: val_accuracy did not improve from 0.89000
28/28 [==============================] - 130s 5s/step - loss: 0.3567 - accuracy: 0.8836 - val_loss: 0.3187 - val_accuracy: 0.8867
Epoch 24/64
28/28 [==============================] - ETA: 0s - loss: 0.2873 - accuracy: 0.9070
Epoch 24: val_accuracy improved from 0.89000 to 0.89833, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 132s 5s/step - loss: 0.2873 - accuracy: 0.9070 - val_loss: 0.3097 - val_accuracy: 0.8983
Epoch 25/64
28/28 [==============================] - ETA: 0s - loss: 0.2809 - accuracy: 0.9056
Epoch 25: val_accuracy improved from 0.89833 to 0.90500, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 132s 5s/step - loss: 0.2809 - accuracy: 0.9056 - val_loss: 0.3030 - val_accuracy: 0.9050
Epoch 26/64
28/28 [==============================] - ETA: 0s - loss: 0.2626 - accuracy: 0.9059
Epoch 26: val_accuracy did not improve from 0.90500
28/28 [==============================] - 143s 5s/step - loss: 0.2626 - accuracy: 0.9059 - val_loss: 0.3411 - val_accuracy: 0.8967
Epoch 27/64
28/28 [==============================] - ETA: 0s - loss: 0.2625 - accuracy: 0.9085
Epoch 27: val_accuracy did not improve from 0.90500
28/28 [==============================] - 145s 5s/step - loss: 0.2625 - accuracy: 0.9085 - val_loss: 0.3043 - val_accuracy: 0.8983
Epoch 28/64
28/28 [==============================] - ETA: 0s - loss: 0.2444 - accuracy: 0.9202
Epoch 28: val_accuracy improved from 0.90500 to 0.90833, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 140s 5s/step - loss: 0.2444 - accuracy: 0.9202 - val_loss: 0.2827 - val_accuracy: 0.9083
Epoch 29/64
28/28 [==============================] - ETA: 0s - loss: 0.2056 - accuracy: 0.9367
Epoch 29: val_accuracy did not improve from 0.90833
28/28 [==============================] - 141s 5s/step - loss: 0.2056 - accuracy: 0.9367 - val_loss: 0.4865 - val_accuracy: 0.8600
Epoch 30/64
28/28 [==============================] - ETA: 0s - loss: 0.2426 - accuracy: 0.9224
Epoch 30: val_accuracy improved from 0.90833 to 0.92167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 138s 5s/step - loss: 0.2426 - accuracy: 0.9224 - val_loss: 0.2582 - val_accuracy: 0.9217
Epoch 31/64
28/28 [==============================] - ETA: 0s - loss: 0.2149 - accuracy: 0.9275
Epoch 31: val_accuracy did not improve from 0.92167
28/28 [==============================] - 138s 5s/step - loss: 0.2149 - accuracy: 0.9275 - val_loss: 0.2912 - val_accuracy: 0.9117
Epoch 32/64
28/28 [==============================] - ETA: 0s - loss: 0.2234 - accuracy: 0.9206
Epoch 32: val_accuracy did not improve from 0.92167
28/28 [==============================] - 136s 5s/step - loss: 0.2234 - accuracy: 0.9206 - val_loss: 0.2308 - val_accuracy: 0.9217
Epoch 33/64
28/28 [==============================] - ETA: 0s - loss: 0.2173 - accuracy: 0.9217
Epoch 33: val_accuracy did not improve from 0.92167
28/28 [==============================] - 134s 5s/step - loss: 0.2173 - accuracy: 0.9217 - val_loss: 0.2668 - val_accuracy: 0.9183
Epoch 34/64
28/28 [==============================] - ETA: 0s - loss: 0.2361 - accuracy: 0.9224
Epoch 34: val_accuracy did not improve from 0.92167
28/28 [==============================] - 133s 5s/step - loss: 0.2361 - accuracy: 0.9224 - val_loss: 0.3098 - val_accuracy: 0.9117
Epoch 35/64
28/28 [==============================] - ETA: 0s - loss: 0.2047 - accuracy: 0.9261
Epoch 35: val_accuracy improved from 0.92167 to 0.93167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 134s 5s/step - loss: 0.2047 - accuracy: 0.9261 - val_loss: 0.2514 - val_accuracy: 0.9317
Epoch 36/64
28/28 [==============================] - ETA: 0s - loss: 0.2003 - accuracy: 0.9374
Epoch 36: val_accuracy did not improve from 0.93167
28/28 [==============================] - 130s 5s/step - loss: 0.2003 - accuracy: 0.9374 - val_loss: 0.3186 - val_accuracy: 0.9050
Epoch 37/64
28/28 [==============================] - ETA: 0s - loss: 0.1961 - accuracy: 0.9337
Epoch 37: val_accuracy did not improve from 0.93167
28/28 [==============================] - 131s 5s/step - loss: 0.1961 - accuracy: 0.9337 - val_loss: 0.2606 - val_accuracy: 0.9150
Epoch 38/64
28/28 [==============================] - ETA: 0s - loss: 0.2004 - accuracy: 0.9392
Epoch 38: val_accuracy did not improve from 0.93167
28/28 [==============================] - 130s 5s/step - loss: 0.2004 - accuracy: 0.9392 - val_loss: 0.2357 - val_accuracy: 0.9283
Epoch 39/64
28/28 [==============================] - ETA: 0s - loss: 0.1940 - accuracy: 0.9389
Epoch 39: val_accuracy did not improve from 0.93167
28/28 [==============================] - 130s 5s/step - loss: 0.1940 - accuracy: 0.9389 - val_loss: 0.3238 - val_accuracy: 0.8983
Epoch 40/64
28/28 [==============================] - ETA: 0s - loss: 0.2728 - accuracy: 0.9158
Epoch 40: val_accuracy did not improve from 0.93167
28/28 [==============================] - 130s 5s/step - loss: 0.2728 - accuracy: 0.9158 - val_loss: 0.2773 - val_accuracy: 0.9217
Epoch 41/64
28/28 [==============================] - ETA: 0s - loss: 0.1861 - accuracy: 0.9374
Epoch 41: val_accuracy did not improve from 0.93167
28/28 [==============================] - 131s 5s/step - loss: 0.1861 - accuracy: 0.9374 - val_loss: 0.2989 - val_accuracy: 0.9150
Epoch 42/64
28/28 [==============================] - ETA: 0s - loss: 0.1809 - accuracy: 0.9392
Epoch 42: val_accuracy improved from 0.93167 to 0.94167, saving model to /content/drive/Shareddrives/Computer Vision Final Project/CervicalCancer/cervical_cancer_best_model_VGG16.hdf5
28/28 [==============================] - 134s 5s/step - loss: 0.1809 - accuracy: 0.9392 - val_loss: 0.2021 - val_accuracy: 0.9417
Epoch 43/64
28/28 [==============================] - ETA: 0s - loss: 0.2251 - accuracy: 0.9202
Epoch 43: val_accuracy did not improve from 0.94167
28/28 [==============================] - 131s 5s/step - loss: 0.2251 - accuracy: 0.9202 - val_loss: 0.3542 - val_accuracy: 0.8967
Epoch 44/64
28/28 [==============================] - ETA: 0s - loss: 0.2330 - accuracy: 0.9213
Epoch 44: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.2330 - accuracy: 0.9213 - val_loss: 0.2490 - val_accuracy: 0.9167
Epoch 45/64
28/28 [==============================] - ETA: 0s - loss: 0.1981 - accuracy: 0.9323
Epoch 45: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1981 - accuracy: 0.9323 - val_loss: 0.2472 - val_accuracy: 0.9250
Epoch 46/64
28/28 [==============================] - ETA: 0s - loss: 0.1617 - accuracy: 0.9464
Epoch 46: val_accuracy did not improve from 0.94167
28/28 [==============================] - 133s 5s/step - loss: 0.1617 - accuracy: 0.9464 - val_loss: 0.2193 - val_accuracy: 0.9267
Epoch 47/64
28/28 [==============================] - ETA: 0s - loss: 0.1950 - accuracy: 0.9392
Epoch 47: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1950 - accuracy: 0.9392 - val_loss: 0.2832 - val_accuracy: 0.9150
Epoch 48/64
28/28 [==============================] - ETA: 0s - loss: 0.1541 - accuracy: 0.9473
Epoch 48: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1541 - accuracy: 0.9473 - val_loss: 0.2593 - val_accuracy: 0.9200
Epoch 49/64
28/28 [==============================] - ETA: 0s - loss: 0.1946 - accuracy: 0.9352
Epoch 49: val_accuracy did not improve from 0.94167
28/28 [==============================] - 131s 5s/step - loss: 0.1946 - accuracy: 0.9352 - val_loss: 0.2605 - val_accuracy: 0.9033
Epoch 50/64
28/28 [==============================] - ETA: 0s - loss: 0.1669 - accuracy: 0.9440
Epoch 50: val_accuracy did not improve from 0.94167
28/28 [==============================] - 131s 5s/step - loss: 0.1669 - accuracy: 0.9440 - val_loss: 0.2459 - val_accuracy: 0.9350
Epoch 51/64
28/28 [==============================] - ETA: 0s - loss: 0.1518 - accuracy: 0.9462
Epoch 51: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1518 - accuracy: 0.9462 - val_loss: 0.3113 - val_accuracy: 0.9167
Epoch 52/64
28/28 [==============================] - ETA: 0s - loss: 0.1858 - accuracy: 0.9389
Epoch 52: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1858 - accuracy: 0.9389 - val_loss: 0.2081 - val_accuracy: 0.9350
Epoch 53/64
28/28 [==============================] - ETA: 0s - loss: 0.1270 - accuracy: 0.9531
Epoch 53: val_accuracy did not improve from 0.94167
28/28 [==============================] - 131s 5s/step - loss: 0.1270 - accuracy: 0.9531 - val_loss: 0.2214 - val_accuracy: 0.9383
Epoch 54/64
28/28 [==============================] - ETA: 0s - loss: 0.1173 - accuracy: 0.9612
Epoch 54: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1173 - accuracy: 0.9612 - val_loss: 0.2257 - val_accuracy: 0.9283
Epoch 55/64
28/28 [==============================] - ETA: 0s - loss: 0.1158 - accuracy: 0.9605
Epoch 55: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1158 - accuracy: 0.9605 - val_loss: 0.2329 - val_accuracy: 0.9217
Epoch 56/64
28/28 [==============================] - ETA: 0s - loss: 0.1187 - accuracy: 0.9619
Epoch 56: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1187 - accuracy: 0.9619 - val_loss: 0.1896 - val_accuracy: 0.9417
Epoch 57/64
28/28 [==============================] - ETA: 0s - loss: 0.1369 - accuracy: 0.9502
Epoch 57: val_accuracy did not improve from 0.94167
28/28 [==============================] - 132s 5s/step - loss: 0.1369 - accuracy: 0.9502 - val_loss: 0.2053 - val_accuracy: 0.9283
Epoch 58/64
28/28 [==============================] - ETA: 0s - loss: 0.1636 - accuracy: 0.9440
Epoch 58: val_accuracy did not improve from 0.94167
28/28 [==============================] - 131s 5s/step - loss: 0.1636 - accuracy: 0.9440 - val_loss: 0.2363 - val_accuracy: 0.9200
Epoch 59/64
28/28 [==============================] - ETA: 0s - loss: 0.1537 - accuracy: 0.9477
Epoch 59: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1537 - accuracy: 0.9477 - val_loss: 0.2090 - val_accuracy: 0.9317
Epoch 60/64
28/28 [==============================] - ETA: 0s - loss: 0.1249 - accuracy: 0.9546
Epoch 60: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1249 - accuracy: 0.9546 - val_loss: 0.2931 - val_accuracy: 0.9167
Epoch 61/64
28/28 [==============================] - ETA: 0s - loss: 0.1260 - accuracy: 0.9572
Epoch 61: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1260 - accuracy: 0.9572 - val_loss: 0.2401 - val_accuracy: 0.9233
Epoch 62/64
28/28 [==============================] - ETA: 0s - loss: 0.1563 - accuracy: 0.9520
Epoch 62: val_accuracy did not improve from 0.94167
28/28 [==============================] - 129s 5s/step - loss: 0.1563 - accuracy: 0.9520 - val_loss: 0.2403 - val_accuracy: 0.9283
Epoch 63/64
28/28 [==============================] - ETA: 0s - loss: 0.1195 - accuracy: 0.9590
Epoch 63: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.1195 - accuracy: 0.9590 - val_loss: 0.1960 - val_accuracy: 0.9350
Epoch 64/64
28/28 [==============================] - ETA: 0s - loss: 0.0955 - accuracy: 0.9674
Epoch 64: val_accuracy did not improve from 0.94167
28/28 [==============================] - 130s 5s/step - loss: 0.0955 - accuracy: 0.9674 - val_loss: 0.2180 - val_accuracy: 0.9300
In [ ]:
from tensorflow.keras.models import Model, load_model
# Loading the Best Fit Model
model = load_model(root_dir + "/cervical_cancer_best_model_VGG16.hdf5")
In [ ]:
# Evaluate the model on the test set
test_loss, test_accuracy = model.evaluate(test_data, steps=test_data.samples // test_data.batch_size)

print(f"Test Loss: {test_loss}")
print(f"Test Accuracy: {test_accuracy}")
6/6 [==============================] - 159s 31s/step - loss: 0.1480 - accuracy: 0.9583
Test Loss: 0.1479993760585785
Test Accuracy: 0.9583333134651184
In [ ]:
import matplotlib.pyplot as plt

# Plot training & validation accuracy values
plt.figure(figsize=(12, 4))

plt.subplot(1, 2, 1)
plt.plot(cnn.history['accuracy'], label='Train Accuracy')
plt.plot(cnn.history['val_accuracy'], label='Validation Accuracy')
plt.title('Model Accuracy')
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(loc='upper left')

# Plot training & validation loss values
plt.subplot(1, 2, 2)
plt.plot(cnn.history['loss'], label='Train Loss')
plt.plot(cnn.history['val_loss'], label='Validation Loss')
plt.title('Model Loss')
plt.ylabel('Loss')
plt.xlabel('Epoch')
plt.legend(loc='upper left')

plt.show()
In [ ]:
import numpy as np
from sklearn.metrics import classification_report, confusion_matrix

# Get the true labels and predictions
y_true = test_data.classes

# Predict the probabilities for each class
y_pred = model.predict(test_data)

# Convert probabilities to class labels using np.argmax
y_pred_classes = np.argmax(y_pred, axis=1)

# Calculate the classification report
report = classification_report(y_true, y_pred_classes, target_names=test_data.class_indices.keys())
print("Classification Report:\n", report)

# Calculate and print the confusion matrix
cm = confusion_matrix(y_true, y_pred_classes)
print("Confusion Matrix:\n", cm)

Summary on VGG Model

  1. Model Accuracy and Loss Curves Accuracy Curves:

Training Accuracy (Blue Line): Shows a steady increase from around 20% to

about 95% over 64 epochs.

Validation Accuracy (Orange Line): Also increases steadily, mirroring the training accuracy and stabilizing around 90-95%.

Observation: The training and validation accuracies are closely aligned, indicating that the model is learning well and there is no significant overfitting or underfitting.

Loss Curves:

Training Loss (Blue Line): Decreases sharply initially and then continues to decrease gradually, stabilizing around 0.2.

Validation Loss (Orange Line): Also decreases in a similar pattern to the training loss, stabilizing around 0.5.

Observation: The validation loss is slightly higher than the training loss, which is typical, but there is no significant divergence, suggesting good generalization.

  1. Model Evaluation Metrics

Test Loss: 0.1480

Test Accuracy: 95.83%

Observation: These results indicate that the model performs very well on the test set, maintaining high accuracy and low loss.

In [ ]: